# Bank Churn using Neural Network The points distribution for this case is as follows: Read the dataset Drop the columns which are unique for all users like IDs (5points) Distinguish the features and target variable(5points) Divide the data set into training and test sets (5points) Normalize the train and test data (10points) Initialize & build the model. Identify the points of improvement and implement the same. Note that you need to demonstrate at least two models(the original and the improved one) and highlight the differences to complete this point. You can also demonstrate more models. (20points) Predict the results using 0.5 as a threshold. Note that you need to first predict the probability and then predict classes using the given threshold (10points) Print the Accuracy score and confusion matrix (5points)
In [50]:
# Start Python Imports
import math, time, random, datetime

# Data Manipulation
import numpy as np
import pandas as pd

# Visualization 
import matplotlib.pyplot as plt
import missingno
import seaborn as sns

# Preprocessing
from sklearn.preprocessing import OneHotEncoder, LabelEncoder, label_binarize, StandardScaler
from sklearn.preprocessing import MinMaxScaler


# Machine learning

from sklearn.model_selection import train_test_split, cross_val_score
from sklearn import model_selection, tree, preprocessing, metrics, linear_model
from sklearn.svm import SVC
from sklearn.linear_model import Perceptron
from sklearn.svm import LinearSVC
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LinearRegression, LogisticRegression, SGDClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier

from sklearn.metrics import confusion_matrix

# Use GridSearchCV to find the best parameters.
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import confusion_matrix, accuracy_score, classification_report,roc_curve, auc
# Tensor Flow and Keras
import tensorflow as tf
import keras
from keras.models import Sequential 
from keras.layers import Dense
from keras.layers import Dropout
In [51]:
# Load the bank churn dataset from the working directory.
data_raw = pd.read_csv("bank.csv")
In [52]:
# Preview the first 10 rows of the raw data.
data_raw.head(10)
Out[52]:
RowNumber CustomerId Surname CreditScore Geography Gender Age Tenure Balance NumOfProducts HasCrCard IsActiveMember EstimatedSalary Exited
0 1 15634602 Hargrave 619 France Female 42 2 0.00 1 1 1 101348.88 1
1 2 15647311 Hill 608 Spain Female 41 1 83807.86 1 0 1 112542.58 0
2 3 15619304 Onio 502 France Female 42 8 159660.80 3 1 0 113931.57 1
3 4 15701354 Boni 699 France Female 39 1 0.00 2 0 0 93826.63 0
4 5 15737888 Mitchell 850 Spain Female 43 2 125510.82 1 1 1 79084.10 0
5 6 15574012 Chu 645 Spain Male 44 8 113755.78 2 1 0 149756.71 1
6 7 15592531 Bartlett 822 France Male 50 7 0.00 2 1 1 10062.80 0
7 8 15656148 Obinna 376 Germany Female 29 4 115046.74 4 1 0 119346.88 1
8 9 15792365 He 501 France Male 44 4 142051.07 2 0 1 74940.50 0
9 10 15592389 H? 684 France Male 27 2 134603.88 1 1 1 71725.73 0
In [53]:
# Work on a copy so the raw data stays untouched during preprocessing.
data = data_raw.copy()
In [54]:
# Dataset dimensions: (rows, columns) -> (10000, 14) per the output below.
data.shape
Out[54]:
(10000, 14)
In [55]:
# Column dtypes and non-null counts for every variable.
data.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 10000 entries, 0 to 9999
Data columns (total 14 columns):
 #   Column           Non-Null Count  Dtype  
---  ------           --------------  -----  
 0   RowNumber        10000 non-null  int64  
 1   CustomerId       10000 non-null  int64  
 2   Surname          10000 non-null  object 
 3   CreditScore      10000 non-null  int64  
 4   Geography        10000 non-null  object 
 5   Gender           10000 non-null  object 
 6   Age              10000 non-null  int64  
 7   Tenure           10000 non-null  int64  
 8   Balance          10000 non-null  float64
 9   NumOfProducts    10000 non-null  int64  
 10  HasCrCard        10000 non-null  int64  
 11  IsActiveMember   10000 non-null  int64  
 12  EstimatedSalary  10000 non-null  float64
 13  Exited           10000 non-null  int64  
dtypes: float64(2), int64(9), object(3)
memory usage: 1.1+ MB
In [56]:
# Count missing values per column (the output shows none in this dataset).
data.isnull().sum()
Out[56]:
RowNumber          0
CustomerId         0
Surname            0
CreditScore        0
Geography          0
Gender             0
Age                0
Tenure             0
Balance            0
NumOfProducts      0
HasCrCard          0
IsActiveMember     0
EstimatedSalary    0
Exited             0
dtype: int64
In [57]:
# Summary statistics of the numeric columns, transposed for readability.
data.describe().transpose()
Out[57]:
count mean std min 25% 50% 75% max
RowNumber 10000.0 5.000500e+03 2886.895680 1.00 2500.75 5.000500e+03 7.500250e+03 10000.00
CustomerId 10000.0 1.569094e+07 71936.186123 15565701.00 15628528.25 1.569074e+07 1.575323e+07 15815690.00
CreditScore 10000.0 6.505288e+02 96.653299 350.00 584.00 6.520000e+02 7.180000e+02 850.00
Age 10000.0 3.892180e+01 10.487806 18.00 32.00 3.700000e+01 4.400000e+01 92.00
Tenure 10000.0 5.012800e+00 2.892174 0.00 3.00 5.000000e+00 7.000000e+00 10.00
Balance 10000.0 7.648589e+04 62397.405202 0.00 0.00 9.719854e+04 1.276442e+05 250898.09
NumOfProducts 10000.0 1.530200e+00 0.581654 1.00 1.00 1.000000e+00 2.000000e+00 4.00
HasCrCard 10000.0 7.055000e-01 0.455840 0.00 0.00 1.000000e+00 1.000000e+00 1.00
IsActiveMember 10000.0 5.151000e-01 0.499797 0.00 0.00 1.000000e+00 1.000000e+00 1.00
EstimatedSalary 10000.0 1.000902e+05 57510.492818 11.58 51002.11 1.001939e+05 1.493882e+05 199992.48
Exited 10000.0 2.037000e-01 0.402769 0.00 0.00 0.000000e+00 0.000000e+00 1.00
In [ ]:
 
In [58]:
# Re-inspect the first 10 rows before feature engineering.
data.head(10)
Out[58]:
RowNumber CustomerId Surname CreditScore Geography Gender Age Tenure Balance NumOfProducts HasCrCard IsActiveMember EstimatedSalary Exited
0 1 15634602 Hargrave 619 France Female 42 2 0.00 1 1 1 101348.88 1
1 2 15647311 Hill 608 Spain Female 41 1 83807.86 1 0 1 112542.58 0
2 3 15619304 Onio 502 France Female 42 8 159660.80 3 1 0 113931.57 1
3 4 15701354 Boni 699 France Female 39 1 0.00 2 0 0 93826.63 0
4 5 15737888 Mitchell 850 Spain Female 43 2 125510.82 1 1 1 79084.10 0
5 6 15574012 Chu 645 Spain Male 44 8 113755.78 2 1 0 149756.71 1
6 7 15592531 Bartlett 822 France Male 50 7 0.00 2 1 1 10062.80 0
7 8 15656148 Obinna 376 Germany Female 29 4 115046.74 4 1 0 119346.88 1
8 9 15792365 He 501 France Male 44 4 142051.07 2 0 1 74940.50 0
9 10 15592389 H? 684 France Male 27 2 134603.88 1 1 1 71725.73 0
In [59]:
# Partition the columns into categorical, numerical, discrete and target groups.
categorical_features = [col for col in data.columns if data[col].dtypes == "O"]
numerical_features = ["CreditScore", "Age", "Balance", "EstimatedSalary"]
discrete_features = ["Tenure", "NumOfProducts", "HasCrCard", "IsActiveMember"]
target = ["Exited"]

print(len(categorical_features),"no of categorical variables")
print(len(numerical_features),"no of numerical variables")
print(len(discrete_features),"no of discrete variables")
print("target has",len(target),"variable")
3 no of categorical variables
4 no of numerical variables
4 no of discrete variables
target has 1 variable
In [60]:
# Inspect the distinct values of each categorical variable.
for feature in categorical_features:
    counts = data[feature].value_counts()
    print(feature,"has",len(counts),"unique values","\n", counts)
Surname has 2932 unique values 
 Smith        32
Martin       29
Scott        29
Walker       28
Brown        26
             ..
Ndubuisi      1
Weller        1
Lanford       1
Gresswell     1
Chadwick      1
Name: Surname, Length: 2932, dtype: int64
Geography has 3 unique values 
 France     5014
Germany    2509
Spain      2477
Name: Geography, dtype: int64
Gender has 2 unique values 
 Male      5457
Female    4543
Name: Gender, dtype: int64
In [61]:
# Inspect the distinct values of each discrete variable.
for feature in discrete_features:
    counts = data[feature].value_counts()
    print(feature,"has",len(counts),"unique values","\n", counts)
Tenure has 11 unique values 
 2     1048
1     1035
7     1028
8     1025
5     1012
3     1009
4      989
9      984
6      967
10     490
0      413
Name: Tenure, dtype: int64
NumOfProducts has 4 unique values 
 1    5084
2    4590
3     266
4      60
Name: NumOfProducts, dtype: int64
HasCrCard has 2 unique values 
 1    7055
0    2945
Name: HasCrCard, dtype: int64
IsActiveMember has 2 unique values 
 1    5151
0    4849
Name: IsActiveMember, dtype: int64
In [62]:
# EDA: check whether the two output classes are balanced
# (the plot shows a clear ~80/20 imbalance).
data_EDA = pd.DataFrame(data)
sns.countplot(data=data_EDA, x="Exited")
plt.title("Distribution of output classes")
Out[62]:
Text(0.5, 1.0, 'Distribution of output classes')
In [63]:
# Univariate analysis: histogram + KDE + rug plot for each continuous variable.
# Fix: sns.distplot is deprecated (and removed in recent seaborn releases);
# histplot(stat="density", kde=True) plus rugplot reproduces the same view.
for features in numerical_features:
    data_EDA = data.copy()
    sns.histplot(data_EDA[features], stat="density", kde=True)
    sns.rugplot(data_EDA[features])
    plt.xlabel(features)
    plt.xticks(rotation=90)
    plt.title("Distribution of numerical variables")
    plt.show()
In [64]:
#Age variable has a right-skewed distribution, meaning most customers are in the 25-50 age group. 
# A lot of customers have zero account balance.
In [65]:
# Relationship between each numerical variable and the target.
# Fix: the original call passed palette=colors1, but colors1 is never defined
# anywhere in this notebook, so the cell raised a NameError. The undefined
# palette argument is dropped (seaborn's default palette is used instead).
for feature in numerical_features:
    data_EDA=data.copy()
    sns.barplot(x="Exited", y=feature, data=data_EDA, order=[1,0])
    plt.xlabel('Exited')
    plt.xticks(rotation=90)
    plt.ylabel(feature)
    plt.title(feature)
    plt.show()
In [66]:
#The average age of exited customers is 45 years 
# 650 is the average credit score of exited customers 
# 86000 is the average balance & 100000 is the average salary of exited customers
In [67]:
# Percentage of customer churn is ~20.4%.
# Fix: the top-level pd.value_counts(...) helper was deprecated and removed
# in pandas 2.0; call .value_counts() on the Series itself instead.
value_counts = data_EDA['Exited'].value_counts()
plt.figure(figsize = (6,6))
value_counts.plot(kind = 'pie', explode = [0,0.1],autopct='%1.1f%%', shadow=True)
plt.title('Proportion of customer churned and retained')
plt.show()
value_counts
Out[67]:
0    7963
1    2037
Name: Exited, dtype: int64
In [74]:
# Multivariate analysis: how each categorical/discrete feature
# relates to churn, shown as grouped count plots.
for feature in categorical_features + discrete_features:
    data_EDA = data.copy()
    sns.countplot(data=data_EDA, x='Exited', hue=feature, order=[1, 0])
    plt.xlabel('Exited')
    plt.xticks(rotation=90)
    plt.ylabel('Count')
    plt.title(feature)
    plt.show()
In [75]:
#Germans and French people are more likely to exit 
# Women have a higher exit rate 
# Customers who have a credit card are more likely to leave 
# Inactive customers are leaving 
# One-year-old customers are more likely to leave
In [76]:
# Pairwise scatter plots of the features, coloured by churn status.
sns.pairplot(data_EDA, kind="scatter", hue="Exited",
             plot_kws={"s": 120, "edgecolor": "white", "linewidth": 2.5})
plt.show()
In [71]:
# Correlation heatmap of the numeric columns.
# Fix: DataFrame.corr() raises on the object columns (Surname, Geography,
# Gender) in pandas >= 2.0; older pandas silently dropped them. Passing
# numeric_only=True keeps the original behaviour and works on both versions.
plt.figure(figsize = (15,15))
sns.heatmap(data_EDA.corr(numeric_only=True), annot = True, cmap = 'RdYlGn')
Out[71]:
<matplotlib.axes._subplots.AxesSubplot at 0x1a51216c90>
In [77]:
# Build the feature matrix and target vector.
# Columns 0-2 (RowNumber, CustomerId, Surname) are per-customer identifiers
# with no predictive value, so they are dropped; the last column is the target.
X = data_EDA.iloc[:, 3:-1].to_numpy()
y = data_EDA.iloc[:, -1].to_numpy()
In [78]:
# Inspect the feature matrix (still contains string columns at this point).
X
Out[78]:
array([[619, 'France', 'Female', ..., 1, 1, 101348.88],
       [608, 'Spain', 'Female', ..., 0, 1, 112542.58],
       [502, 'France', 'Female', ..., 1, 0, 113931.57],
       ...,
       [709, 'France', 'Female', ..., 0, 1, 42085.58],
       [772, 'Germany', 'Male', ..., 1, 0, 92888.52],
       [792, 'France', 'Female', ..., 1, 0, 38190.78]], dtype=object)
In [79]:
# Inspect the binary target vector.
y
Out[79]:
array([1, 0, 1, ..., 1, 1, 0])
In [80]:
# Label-encode the binary Gender column (column index 2 of X) to 0/1.
from sklearn.preprocessing import LabelEncoder
le = LabelEncoder()
X[:, 2] = le.fit_transform(X[:, 2])
In [81]:
# Verify: Gender (column 2) is now encoded as 0/1.
print(X)
[[619 'France' 0 ... 1 1 101348.88]
 [608 'Spain' 0 ... 0 1 112542.58]
 [502 'France' 0 ... 1 0 113931.57]
 ...
 [709 'France' 0 ... 0 1 42085.58]
 [772 'Germany' 1 ... 1 0 92888.52]
 [792 'France' 0 ... 1 0 38190.78]]
In [82]:
# One-hot encode the Geography column (index 1). The remaining columns
# pass through unchanged and are placed after the encoded dummy columns.
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder
ct = ColumnTransformer(
    transformers=[('encoder', OneHotEncoder(), [1])],
    remainder='passthrough',
)
X = np.array(ct.fit_transform(X))
In [83]:
# Verify: the Geography dummy columns are now the leading columns of X.
X
Out[83]:
array([[1.0, 0.0, 0.0, ..., 1, 1, 101348.88],
       [0.0, 0.0, 1.0, ..., 0, 1, 112542.58],
       [1.0, 0.0, 0.0, ..., 1, 0, 113931.57],
       ...,
       [1.0, 0.0, 0.0, ..., 0, 1, 42085.58],
       [0.0, 1.0, 0.0, ..., 1, 0, 92888.52],
       [1.0, 0.0, 0.0, ..., 1, 0, 38190.78]], dtype=object)
In [84]:
# The target vector is unchanged by the encoding steps.
y
Out[84]:
array([1, 0, 1, ..., 1, 1, 0])
In [86]:
# 80/20 train/test split with a fixed seed for reproducibility.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X,y, test_size = 0.2, random_state = 0)
In [87]:
# Standardize the features: fit the scaler on the training data only and
# apply the same transform to the test data, avoiding test-set leakage.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
In [88]:
# Modeliing , building with ANN

Model 1: Sequential Keras model with ReLU activations, batch size = 10 and 50 epochs

In [89]:
import tensorflow as tf
In [90]:
# Initialize the ANN as a sequential (layer-by-layer) model.
ann = tf.keras.models.Sequential()
In [91]:
# Input + first hidden layer: 6 units with ReLU activation.
ann.add(tf.keras.layers.Dense(units=6, activation='relu'))
In [92]:
# Second hidden layer: 6 units with ReLU activation.
ann.add(tf.keras.layers.Dense(units = 6, activation = 'relu'))
In [93]:
# Output layer: a single sigmoid unit producing the churn probability.
ann.add(tf.keras.layers.Dense(units = 1, activation = 'sigmoid'))
In [94]:
# Compile with the Adam optimizer and binary cross-entropy loss.
ann.compile(optimizer  ='adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
In [95]:
# Train for 50 epochs with batch size 10, holding out 33% of the
# training data as a validation split for the learning curves below.
model_history = ann.fit(X_train, y_train,validation_split=0.33,batch_size = 10, epochs = 50)
Epoch 1/50
536/536 [==============================] - 1s 1ms/step - loss: 0.5103 - accuracy: 0.7858 - val_loss: 0.4617 - val_accuracy: 0.7955
Epoch 2/50
536/536 [==============================] - 1s 1ms/step - loss: 0.4489 - accuracy: 0.7962 - val_loss: 0.4401 - val_accuracy: 0.7955
Epoch 3/50
536/536 [==============================] - 1s 1ms/step - loss: 0.4311 - accuracy: 0.7962 - val_loss: 0.4298 - val_accuracy: 0.7955
Epoch 4/50
536/536 [==============================] - 1s 1ms/step - loss: 0.4226 - accuracy: 0.7962 - val_loss: 0.4248 - val_accuracy: 0.7955
Epoch 5/50
536/536 [==============================] - 1s 1ms/step - loss: 0.4166 - accuracy: 0.7962 - val_loss: 0.4213 - val_accuracy: 0.7955
Epoch 6/50
536/536 [==============================] - 1s 1ms/step - loss: 0.4113 - accuracy: 0.8085 - val_loss: 0.4179 - val_accuracy: 0.8114
Epoch 7/50
536/536 [==============================] - 1s 1ms/step - loss: 0.4065 - accuracy: 0.8175 - val_loss: 0.4153 - val_accuracy: 0.8111
Epoch 8/50
536/536 [==============================] - 1s 1ms/step - loss: 0.4013 - accuracy: 0.8182 - val_loss: 0.4114 - val_accuracy: 0.8126
Epoch 9/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3957 - accuracy: 0.8252 - val_loss: 0.4074 - val_accuracy: 0.8152
Epoch 10/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3907 - accuracy: 0.8253 - val_loss: 0.4059 - val_accuracy: 0.8183
Epoch 11/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3864 - accuracy: 0.8259 - val_loss: 0.4010 - val_accuracy: 0.8270
Epoch 12/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3813 - accuracy: 0.8373 - val_loss: 0.3975 - val_accuracy: 0.8289
Epoch 13/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3774 - accuracy: 0.8388 - val_loss: 0.3968 - val_accuracy: 0.8289
Epoch 14/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3736 - accuracy: 0.8433 - val_loss: 0.3949 - val_accuracy: 0.8228
Epoch 15/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3711 - accuracy: 0.8444 - val_loss: 0.3924 - val_accuracy: 0.8292
Epoch 16/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3677 - accuracy: 0.8453 - val_loss: 0.3924 - val_accuracy: 0.8289
Epoch 17/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3655 - accuracy: 0.8453 - val_loss: 0.3889 - val_accuracy: 0.8319
Epoch 18/50
536/536 [==============================] - 1s 965us/step - loss: 0.3635 - accuracy: 0.8462 - val_loss: 0.3877 - val_accuracy: 0.8330
Epoch 19/50
536/536 [==============================] - 1s 995us/step - loss: 0.3617 - accuracy: 0.8487 - val_loss: 0.3856 - val_accuracy: 0.8368
Epoch 20/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3601 - accuracy: 0.8464 - val_loss: 0.3829 - val_accuracy: 0.8357
Epoch 21/50
536/536 [==============================] - 1s 980us/step - loss: 0.3578 - accuracy: 0.8503 - val_loss: 0.3822 - val_accuracy: 0.8360
Epoch 22/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3563 - accuracy: 0.8487 - val_loss: 0.3788 - val_accuracy: 0.8398
Epoch 23/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3548 - accuracy: 0.8498 - val_loss: 0.3785 - val_accuracy: 0.8387
Epoch 24/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3530 - accuracy: 0.8530 - val_loss: 0.3767 - val_accuracy: 0.8398
Epoch 25/50
536/536 [==============================] - 1s 979us/step - loss: 0.3522 - accuracy: 0.8498 - val_loss: 0.3752 - val_accuracy: 0.8444
Epoch 26/50
536/536 [==============================] - 1s 954us/step - loss: 0.3508 - accuracy: 0.8518 - val_loss: 0.3762 - val_accuracy: 0.8413
Epoch 27/50
536/536 [==============================] - 0s 927us/step - loss: 0.3498 - accuracy: 0.8541 - val_loss: 0.3719 - val_accuracy: 0.8432
Epoch 28/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3485 - accuracy: 0.8528 - val_loss: 0.3732 - val_accuracy: 0.8440
Epoch 29/50
536/536 [==============================] - 1s 944us/step - loss: 0.3477 - accuracy: 0.8530 - val_loss: 0.3698 - val_accuracy: 0.8451
Epoch 30/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3459 - accuracy: 0.8531 - val_loss: 0.3722 - val_accuracy: 0.8463
Epoch 31/50
536/536 [==============================] - 1s 989us/step - loss: 0.3461 - accuracy: 0.8546 - val_loss: 0.3693 - val_accuracy: 0.8482
Epoch 32/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3454 - accuracy: 0.8552 - val_loss: 0.3699 - val_accuracy: 0.8485
Epoch 33/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3443 - accuracy: 0.8572 - val_loss: 0.3669 - val_accuracy: 0.8516
Epoch 34/50
536/536 [==============================] - 1s 937us/step - loss: 0.3424 - accuracy: 0.8571 - val_loss: 0.3721 - val_accuracy: 0.8474
Epoch 35/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3417 - accuracy: 0.8569 - val_loss: 0.3674 - val_accuracy: 0.8497
Epoch 36/50
536/536 [==============================] - 1s 970us/step - loss: 0.3412 - accuracy: 0.8589 - val_loss: 0.3655 - val_accuracy: 0.8504
Epoch 37/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3393 - accuracy: 0.8572 - val_loss: 0.3629 - val_accuracy: 0.8542
Epoch 38/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3393 - accuracy: 0.8582 - val_loss: 0.3657 - val_accuracy: 0.8523
Epoch 39/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3380 - accuracy: 0.8584 - val_loss: 0.3678 - val_accuracy: 0.8550
Epoch 40/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3381 - accuracy: 0.8572 - val_loss: 0.3630 - val_accuracy: 0.8516
Epoch 41/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3367 - accuracy: 0.8606 - val_loss: 0.3618 - val_accuracy: 0.8531
Epoch 42/50
536/536 [==============================] - 1s 983us/step - loss: 0.3367 - accuracy: 0.8580 - val_loss: 0.3632 - val_accuracy: 0.8504
Epoch 43/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3362 - accuracy: 0.8593 - val_loss: 0.3615 - val_accuracy: 0.8538
Epoch 44/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3357 - accuracy: 0.8606 - val_loss: 0.3612 - val_accuracy: 0.8546
Epoch 45/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3353 - accuracy: 0.8591 - val_loss: 0.3636 - val_accuracy: 0.8512
Epoch 46/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3351 - accuracy: 0.8584 - val_loss: 0.3633 - val_accuracy: 0.8527
Epoch 47/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3349 - accuracy: 0.8606 - val_loss: 0.3636 - val_accuracy: 0.8497
Epoch 48/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3340 - accuracy: 0.8627 - val_loss: 0.3652 - val_accuracy: 0.8497
Epoch 49/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3341 - accuracy: 0.8606 - val_loss: 0.3614 - val_accuracy: 0.8538
Epoch 50/50
536/536 [==============================] - 1s 1ms/step - loss: 0.3336 - accuracy: 0.8606 - val_loss: 0.3646 - val_accuracy: 0.8535
In [96]:
# Plot training vs validation accuracy per epoch.
history = model_history.history
plt.plot(history['accuracy'])
plt.plot(history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
In [97]:
# Plot training vs validation loss per epoch.
history = model_history.history
plt.plot(history['loss'])
plt.plot(history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper right')
plt.show()
In [98]:
# Predict churn probabilities on the test set, then threshold at 0.5
# to obtain the predicted classes (True = churn).
y_pred = ann.predict(X_test) > 0.5
In [99]:
# Confusion matrix and accuracy on the held-out test set
# (the output below shows 85.2% accuracy).
from sklearn.metrics import confusion_matrix, accuracy_score, classification_report
cm = confusion_matrix(y_test, y_pred)
print(cm)
print(accuracy_score(y_test, y_pred))
[[1497   98]
 [ 198  207]]
0.852
In [101]:
# Per-class precision/recall/F1 report for the test-set predictions.
cr=metrics.classification_report(y_test,y_pred)
print(cr)
              precision    recall  f1-score   support

           0       0.88      0.94      0.91      1595
           1       0.68      0.51      0.58       405

    accuracy                           0.85      2000
   macro avg       0.78      0.72      0.75      2000
weighted avg       0.84      0.85      0.84      2000

Model 2: Hyper-parameter tuning using Grid search with multiple learning rates and 5 fold validation

In [128]:
# Model 2: hyper-parameter tuning of the learning rate with GridSearchCV
# (5-fold CV, n_jobs=-3 to leave some cores free).
# Tried multiple batch sizes and epochs locally but it was too slow;
# settled on {'batch_size': 10, 'epochs': 150, 'optimizers': 'Adam'}.
#
# Fixes:
#  - KerasClassifier was never imported, so this cell raised a NameError;
#    it is imported here. NOTE(review): on Keras 3 / TF >= 2.12 this wrapper
#    was removed — the drop-in replacement is scikeras.wrappers.KerasClassifier.
#  - keras.optimizers.Adam(lr=...) uses the deprecated `lr` alias; the
#    supported keyword is `learning_rate`.
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV

def build_classifier(lrs):
    """Build a 6-unit ReLU network with a sigmoid output, compiled with
    Adam at the given learning rate `lrs`."""
    classifier = Sequential()
    classifier.add(Dense(6, activation = "relu"))
    classifier.add(Dense(1, activation = "sigmoid"))
    classifier.compile(optimizer = keras.optimizers.Adam(learning_rate = lrs),
                       loss = "binary_crossentropy", metrics = ["accuracy"])
    return classifier

classifier = KerasClassifier(build_fn = build_classifier, batch_size = 10, epochs = 150)
parameters = {'lrs':[ 0.001, 0.003]}
grid_search = GridSearchCV(estimator = classifier,
                           param_grid = parameters,
                           scoring = 'accuracy',
                           cv = 5 ,n_jobs=-3)
grid_search = grid_search.fit(X_train, y_train)
best_parameters = grid_search.best_params_
best_accuracy = grid_search.best_score_
Epoch 1/150
800/800 [==============================] - 1s 688us/step - loss: 0.5509 - accuracy: 0.7469
Epoch 2/150
800/800 [==============================] - 1s 701us/step - loss: 0.4413 - accuracy: 0.8135
Epoch 3/150
800/800 [==============================] - 1s 646us/step - loss: 0.4209 - accuracy: 0.8254
Epoch 4/150
800/800 [==============================] - 1s 746us/step - loss: 0.4084 - accuracy: 0.8298
Epoch 5/150
800/800 [==============================] - 1s 692us/step - loss: 0.3979 - accuracy: 0.8381
Epoch 6/150
800/800 [==============================] - 1s 740us/step - loss: 0.3886 - accuracy: 0.8409
Epoch 7/150
800/800 [==============================] - 1s 698us/step - loss: 0.3800 - accuracy: 0.8436
Epoch 8/150
800/800 [==============================] - 1s 719us/step - loss: 0.3721 - accuracy: 0.8496
Epoch 9/150
800/800 [==============================] - 1s 754us/step - loss: 0.3665 - accuracy: 0.8531
Epoch 10/150
800/800 [==============================] - 1s 721us/step - loss: 0.3617 - accuracy: 0.8550
Epoch 11/150
800/800 [==============================] - 1s 760us/step - loss: 0.3581 - accuracy: 0.8591
Epoch 12/150
800/800 [==============================] - 1s 686us/step - loss: 0.3555 - accuracy: 0.8593
Epoch 13/150
800/800 [==============================] - 1s 747us/step - loss: 0.3532 - accuracy: 0.8581
Epoch 14/150
800/800 [==============================] - 1s 717us/step - loss: 0.3516 - accuracy: 0.8590
Epoch 15/150
800/800 [==============================] - 1s 712us/step - loss: 0.3507 - accuracy: 0.8591
Epoch 16/150
800/800 [==============================] - 1s 744us/step - loss: 0.3494 - accuracy: 0.8581
Epoch 17/150
800/800 [==============================] - 1s 657us/step - loss: 0.3488 - accuracy: 0.8594
Epoch 18/150
800/800 [==============================] - 1s 720us/step - loss: 0.3476 - accuracy: 0.8612
Epoch 19/150
800/800 [==============================] - 1s 671us/step - loss: 0.3470 - accuracy: 0.8599
Epoch 20/150
800/800 [==============================] - 1s 794us/step - loss: 0.3467 - accuracy: 0.8601
Epoch 21/150
800/800 [==============================] - 1s 883us/step - loss: 0.3460 - accuracy: 0.8604
Epoch 22/150
800/800 [==============================] - 1s 721us/step - loss: 0.3455 - accuracy: 0.8627
Epoch 23/150
800/800 [==============================] - 1s 720us/step - loss: 0.3449 - accuracy: 0.8602
Epoch 24/150
800/800 [==============================] - 1s 659us/step - loss: 0.3449 - accuracy: 0.8614
Epoch 25/150
800/800 [==============================] - 1s 824us/step - loss: 0.3443 - accuracy: 0.8614
Epoch 26/150
800/800 [==============================] - 1s 743us/step - loss: 0.3437 - accuracy: 0.8625
Epoch 27/150
800/800 [==============================] - 1s 707us/step - loss: 0.3439 - accuracy: 0.8595
Epoch 28/150
800/800 [==============================] - 1s 718us/step - loss: 0.3436 - accuracy: 0.8616
Epoch 29/150
800/800 [==============================] - 1s 651us/step - loss: 0.3435 - accuracy: 0.8610
Epoch 30/150
800/800 [==============================] - 1s 739us/step - loss: 0.3431 - accuracy: 0.8622
Epoch 31/150
800/800 [==============================] - 1s 671us/step - loss: 0.3431 - accuracy: 0.8594
Epoch 32/150
800/800 [==============================] - 1s 751us/step - loss: 0.3428 - accuracy: 0.8610
Epoch 33/150
800/800 [==============================] - 1s 753us/step - loss: 0.3424 - accuracy: 0.8606
Epoch 34/150
800/800 [==============================] - 1s 689us/step - loss: 0.3424 - accuracy: 0.8622
Epoch 35/150
800/800 [==============================] - 1s 777us/step - loss: 0.3420 - accuracy: 0.8609
Epoch 36/150
800/800 [==============================] - 1s 736us/step - loss: 0.3419 - accuracy: 0.8611
Epoch 37/150
800/800 [==============================] - 1s 834us/step - loss: 0.3418 - accuracy: 0.8621
Epoch 38/150
800/800 [==============================] - 1s 822us/step - loss: 0.3416 - accuracy: 0.8619
Epoch 39/150
800/800 [==============================] - 1s 684us/step - loss: 0.3413 - accuracy: 0.8615
Epoch 40/150
800/800 [==============================] - 1s 762us/step - loss: 0.3411 - accuracy: 0.8606
Epoch 41/150
800/800 [==============================] - 1s 677us/step - loss: 0.3404 - accuracy: 0.8626
Epoch 42/150
800/800 [==============================] - 1s 687us/step - loss: 0.3403 - accuracy: 0.8622
Epoch 43/150
800/800 [==============================] - 1s 774us/step - loss: 0.3395 - accuracy: 0.8622
Epoch 44/150
800/800 [==============================] - 1s 751us/step - loss: 0.3393 - accuracy: 0.8619
Epoch 45/150
800/800 [==============================] - 1s 692us/step - loss: 0.3386 - accuracy: 0.8621
Epoch 46/150
800/800 [==============================] - 1s 650us/step - loss: 0.3388 - accuracy: 0.8620
Epoch 47/150
800/800 [==============================] - 1s 742us/step - loss: 0.3382 - accuracy: 0.8619
Epoch 48/150
800/800 [==============================] - 1s 688us/step - loss: 0.3379 - accuracy: 0.8625
Epoch 49/150
800/800 [==============================] - 1s 685us/step - loss: 0.3377 - accuracy: 0.8621
Epoch 50/150
800/800 [==============================] - 1s 716us/step - loss: 0.3377 - accuracy: 0.8634
Epoch 51/150
800/800 [==============================] - 1s 661us/step - loss: 0.3375 - accuracy: 0.8612
Epoch 52/150
800/800 [==============================] - 1s 666us/step - loss: 0.3374 - accuracy: 0.8620
Epoch 53/150
800/800 [==============================] - 1s 669us/step - loss: 0.3372 - accuracy: 0.8624
Epoch 54/150
800/800 [==============================] - 1s 712us/step - loss: 0.3370 - accuracy: 0.8633
Epoch 55/150
800/800 [==============================] - 1s 643us/step - loss: 0.3371 - accuracy: 0.8622
Epoch 56/150
800/800 [==============================] - 1s 730us/step - loss: 0.3371 - accuracy: 0.8616
Epoch 57/150
800/800 [==============================] - 1s 648us/step - loss: 0.3367 - accuracy: 0.8619
Epoch 58/150
800/800 [==============================] - 1s 730us/step - loss: 0.3364 - accuracy: 0.8626
Epoch 59/150
800/800 [==============================] - 1s 686us/step - loss: 0.3360 - accuracy: 0.8615
Epoch 60/150
800/800 [==============================] - 1s 661us/step - loss: 0.3363 - accuracy: 0.8615
Epoch 61/150
800/800 [==============================] - 1s 715us/step - loss: 0.3361 - accuracy: 0.8648
Epoch 62/150
800/800 [==============================] - 1s 649us/step - loss: 0.3359 - accuracy: 0.8625
Epoch 63/150
800/800 [==============================] - 1s 713us/step - loss: 0.3360 - accuracy: 0.8621
Epoch 64/150
800/800 [==============================] - 1s 652us/step - loss: 0.3359 - accuracy: 0.8620
Epoch 65/150
800/800 [==============================] - 1s 738us/step - loss: 0.3360 - accuracy: 0.8634
Epoch 66/150
800/800 [==============================] - 1s 655us/step - loss: 0.3358 - accuracy: 0.8609
Epoch 67/150
800/800 [==============================] - 1s 741us/step - loss: 0.3357 - accuracy: 0.8633
Epoch 68/150
800/800 [==============================] - 1s 725us/step - loss: 0.3352 - accuracy: 0.8643
Epoch 69/150
800/800 [==============================] - 1s 657us/step - loss: 0.3351 - accuracy: 0.8619
Epoch 70/150
800/800 [==============================] - 1s 825us/step - loss: 0.3353 - accuracy: 0.8608
Epoch 71/150
800/800 [==============================] - 1s 679us/step - loss: 0.3353 - accuracy: 0.8631
Epoch 72/150
800/800 [==============================] - 1s 702us/step - loss: 0.3351 - accuracy: 0.8624
Epoch 73/150
800/800 [==============================] - 1s 651us/step - loss: 0.3354 - accuracy: 0.8620
Epoch 74/150
800/800 [==============================] - 1s 741us/step - loss: 0.3354 - accuracy: 0.8615
Epoch 75/150
800/800 [==============================] - 1s 674us/step - loss: 0.3353 - accuracy: 0.8624
Epoch 76/150
800/800 [==============================] - 1s 665us/step - loss: 0.3348 - accuracy: 0.8640
Epoch 77/150
800/800 [==============================] - 1s 720us/step - loss: 0.3350 - accuracy: 0.8616
Epoch 78/150
800/800 [==============================] - 1s 662us/step - loss: 0.3348 - accuracy: 0.8608
Epoch 79/150
800/800 [==============================] - 1s 708us/step - loss: 0.3351 - accuracy: 0.8639
Epoch 80/150
800/800 [==============================] - 1s 643us/step - loss: 0.3348 - accuracy: 0.8621
Epoch 81/150
800/800 [==============================] - 1s 696us/step - loss: 0.3349 - accuracy: 0.8612
Epoch 82/150
800/800 [==============================] - 1s 640us/step - loss: 0.3342 - accuracy: 0.8620
Epoch 83/150
800/800 [==============================] - 1s 721us/step - loss: 0.3346 - accuracy: 0.8619
Epoch 84/150
800/800 [==============================] - 1s 669us/step - loss: 0.3346 - accuracy: 0.8619
Epoch 85/150
800/800 [==============================] - 1s 669us/step - loss: 0.3345 - accuracy: 0.8621
Epoch 86/150
800/800 [==============================] - 1s 697us/step - loss: 0.3341 - accuracy: 0.8633
Epoch 87/150
800/800 [==============================] - 1s 651us/step - loss: 0.3342 - accuracy: 0.8630
Epoch 88/150
800/800 [==============================] - 1s 676us/step - loss: 0.3342 - accuracy: 0.8644
Epoch 89/150
800/800 [==============================] - 1s 648us/step - loss: 0.3337 - accuracy: 0.8611
Epoch 90/150
800/800 [==============================] - 1s 686us/step - loss: 0.3337 - accuracy: 0.8626
Epoch 91/150
800/800 [==============================] - 1s 638us/step - loss: 0.3342 - accuracy: 0.8622
Epoch 92/150
800/800 [==============================] - 1s 722us/step - loss: 0.3341 - accuracy: 0.8633
Epoch 93/150
800/800 [==============================] - 1s 635us/step - loss: 0.3337 - accuracy: 0.8630
Epoch 94/150
800/800 [==============================] - 1s 695us/step - loss: 0.3340 - accuracy: 0.8625
Epoch 95/150
800/800 [==============================] - 1s 650us/step - loss: 0.3336 - accuracy: 0.8624
Epoch 96/150
800/800 [==============================] - 1s 714us/step - loss: 0.3341 - accuracy: 0.8621
Epoch 97/150
800/800 [==============================] - 1s 694us/step - loss: 0.3334 - accuracy: 0.8640
Epoch 98/150
800/800 [==============================] - 1s 659us/step - loss: 0.3337 - accuracy: 0.8625
Epoch 99/150
800/800 [==============================] - 1s 700us/step - loss: 0.3334 - accuracy: 0.8629
Epoch 100/150
800/800 [==============================] - 1s 643us/step - loss: 0.3338 - accuracy: 0.8627
Epoch 101/150
800/800 [==============================] - 1s 718us/step - loss: 0.3338 - accuracy: 0.8610
Epoch 102/150
800/800 [==============================] - 1s 637us/step - loss: 0.3337 - accuracy: 0.8616
Epoch 103/150
800/800 [==============================] - 1s 704us/step - loss: 0.3337 - accuracy: 0.8634
Epoch 104/150
800/800 [==============================] - 1s 660us/step - loss: 0.3335 - accuracy: 0.8624
Epoch 105/150
800/800 [==============================] - 1s 728us/step - loss: 0.3332 - accuracy: 0.8646
Epoch 106/150
800/800 [==============================] - 1s 638us/step - loss: 0.3332 - accuracy: 0.8630
Epoch 107/150
800/800 [==============================] - 1s 709us/step - loss: 0.3334 - accuracy: 0.8641
Epoch 108/150
800/800 [==============================] - 1s 687us/step - loss: 0.3336 - accuracy: 0.8626
Epoch 109/150
800/800 [==============================] - 1s 637us/step - loss: 0.3335 - accuracy: 0.8619
Epoch 110/150
800/800 [==============================] - 1s 723us/step - loss: 0.3337 - accuracy: 0.8625
Epoch 111/150
800/800 [==============================] - 1s 644us/step - loss: 0.3337 - accuracy: 0.8624
Epoch 112/150
800/800 [==============================] - 1s 687us/step - loss: 0.3331 - accuracy: 0.8639
Epoch 113/150
800/800 [==============================] - 1s 677us/step - loss: 0.3334 - accuracy: 0.8625
Epoch 114/150
800/800 [==============================] - 1s 703us/step - loss: 0.3333 - accuracy: 0.8621
Epoch 115/150
800/800 [==============================] - 1s 635us/step - loss: 0.3333 - accuracy: 0.8648
Epoch 116/150
800/800 [==============================] - 1s 697us/step - loss: 0.3331 - accuracy: 0.8629
Epoch 117/150
800/800 [==============================] - 1s 642us/step - loss: 0.3333 - accuracy: 0.8640
Epoch 118/150
800/800 [==============================] - 1s 716us/step - loss: 0.3337 - accuracy: 0.8635
Epoch 119/150
800/800 [==============================] - 1s 668us/step - loss: 0.3334 - accuracy: 0.8627
Epoch 120/150
800/800 [==============================] - 1s 701us/step - loss: 0.3335 - accuracy: 0.8639
Epoch 121/150
800/800 [==============================] - 1s 742us/step - loss: 0.3334 - accuracy: 0.8646
Epoch 122/150
800/800 [==============================] - 1s 713us/step - loss: 0.3332 - accuracy: 0.8629
Epoch 123/150
800/800 [==============================] - 1s 725us/step - loss: 0.3332 - accuracy: 0.8641
Epoch 124/150
800/800 [==============================] - 1s 671us/step - loss: 0.3332 - accuracy: 0.8639
Epoch 125/150
800/800 [==============================] - 1s 734us/step - loss: 0.3331 - accuracy: 0.8629
Epoch 126/150
800/800 [==============================] - 1s 708us/step - loss: 0.3325 - accuracy: 0.8629
Epoch 127/150
800/800 [==============================] - 1s 648us/step - loss: 0.3331 - accuracy: 0.8631
Epoch 128/150
800/800 [==============================] - 1s 716us/step - loss: 0.3329 - accuracy: 0.8645
Epoch 129/150
800/800 [==============================] - 1s 657us/step - loss: 0.3328 - accuracy: 0.8629
Epoch 130/150
800/800 [==============================] - 1s 692us/step - loss: 0.3330 - accuracy: 0.8627
Epoch 131/150
800/800 [==============================] - 1s 651us/step - loss: 0.3328 - accuracy: 0.8643
Epoch 132/150
800/800 [==============================] - 1s 705us/step - loss: 0.3326 - accuracy: 0.8640
Epoch 133/150
800/800 [==============================] - 1s 634us/step - loss: 0.3329 - accuracy: 0.8643
Epoch 134/150
800/800 [==============================] - 1s 699us/step - loss: 0.3324 - accuracy: 0.8634
Epoch 135/150
800/800 [==============================] - 1s 636us/step - loss: 0.3328 - accuracy: 0.8633
Epoch 136/150
800/800 [==============================] - 1s 692us/step - loss: 0.3323 - accuracy: 0.8630
Epoch 137/150
800/800 [==============================] - 1s 662us/step - loss: 0.3327 - accuracy: 0.8635
Epoch 138/150
800/800 [==============================] - 1s 697us/step - loss: 0.3326 - accuracy: 0.8650
Epoch 139/150
800/800 [==============================] - 1s 695us/step - loss: 0.3326 - accuracy: 0.8622
Epoch 140/150
800/800 [==============================] - 1s 649us/step - loss: 0.3329 - accuracy: 0.8640
Epoch 141/150
800/800 [==============================] - 1s 693us/step - loss: 0.3328 - accuracy: 0.8661
Epoch 142/150
800/800 [==============================] - 1s 659us/step - loss: 0.3327 - accuracy: 0.8652
Epoch 143/150
800/800 [==============================] - 1s 697us/step - loss: 0.3326 - accuracy: 0.8633
Epoch 144/150
800/800 [==============================] - 1s 652us/step - loss: 0.3325 - accuracy: 0.8644
Epoch 145/150
800/800 [==============================] - 1s 691us/step - loss: 0.3324 - accuracy: 0.8648
Epoch 146/150
800/800 [==============================] - 0s 624us/step - loss: 0.3324 - accuracy: 0.8633
Epoch 147/150
800/800 [==============================] - 1s 734us/step - loss: 0.3324 - accuracy: 0.8634
Epoch 148/150
800/800 [==============================] - 1s 658us/step - loss: 0.3324 - accuracy: 0.8655
Epoch 149/150
800/800 [==============================] - 1s 683us/step - loss: 0.3324 - accuracy: 0.8633
Epoch 150/150
800/800 [==============================] - 1s 688us/step - loss: 0.3321 - accuracy: 0.8635
In [129]:
# Show the best hyperparameters and best cross-validated accuracy
# found by the earlier grid search (used to configure the improved model below).
display(best_parameters)
display(best_accuracy)
{'lrs': 0.001}
0.8605
In [ ]:
# using above parameters to create new model
In [130]:
# Create the improved ANN as an empty Sequential container;
# layers are appended one at a time in the following cells.
ann_model2 = tf.keras.Sequential()
In [131]:
# First hidden layer: 6 ReLU units (input dimension is inferred on first fit)
hidden_layer = tf.keras.layers.Dense(6, activation='relu')
ann_model2.add(hidden_layer)
In [132]:
ann_model2.add(tf.keras.layers.Dense(units=1, activation='sigmoid'))
In [134]:
# Compile the ANN with plain SGD at the tuned learning rate (0.001 from the grid search).
# FIX: the `lr` keyword is deprecated in TF2 Keras (and removed in recent releases);
# `learning_rate` is the supported argument name.
sgd_optimizer = tf.keras.optimizers.SGD(learning_rate=0.001)
ann_model2.compile(optimizer=sgd_optimizer,
                   loss='binary_crossentropy',  # binary churn target (0/1)
                   metrics=['accuracy'])
In [138]:
# Fit the ANN on the training set: batch size 10, 150 epochs,
# holding out 33% of the training data for per-epoch validation
# (this is what produces the val_loss / val_accuracy columns in the log below).
model2_history = ann_model2.fit(X_train, y_train,validation_split=0.33,batch_size = 10, epochs = 150)
Epoch 1/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3553 - accuracy: 0.8556 - val_loss: 0.3718 - val_accuracy: 0.8482
Epoch 2/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3550 - accuracy: 0.8558 - val_loss: 0.3715 - val_accuracy: 0.8485
Epoch 3/150
536/536 [==============================] - 1s 967us/step - loss: 0.3547 - accuracy: 0.8558 - val_loss: 0.3713 - val_accuracy: 0.8489
Epoch 4/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3544 - accuracy: 0.8559 - val_loss: 0.3711 - val_accuracy: 0.8482
Epoch 5/150
536/536 [==============================] - 1s 979us/step - loss: 0.3542 - accuracy: 0.8561 - val_loss: 0.3708 - val_accuracy: 0.8489
Epoch 6/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3539 - accuracy: 0.8561 - val_loss: 0.3706 - val_accuracy: 0.8485
Epoch 7/150
536/536 [==============================] - 1s 966us/step - loss: 0.3536 - accuracy: 0.8567 - val_loss: 0.3704 - val_accuracy: 0.8485
Epoch 8/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3534 - accuracy: 0.8565 - val_loss: 0.3702 - val_accuracy: 0.8489
Epoch 9/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3531 - accuracy: 0.8569 - val_loss: 0.3700 - val_accuracy: 0.8485
Epoch 10/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3529 - accuracy: 0.8559 - val_loss: 0.3698 - val_accuracy: 0.8489
Epoch 11/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3526 - accuracy: 0.8569 - val_loss: 0.3696 - val_accuracy: 0.8489
Epoch 12/150
536/536 [==============================] - 1s 986us/step - loss: 0.3524 - accuracy: 0.8565 - val_loss: 0.3694 - val_accuracy: 0.8497
Epoch 13/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3521 - accuracy: 0.8572 - val_loss: 0.3692 - val_accuracy: 0.8497
Epoch 14/150
536/536 [==============================] - 1s 998us/step - loss: 0.3519 - accuracy: 0.8567 - val_loss: 0.3691 - val_accuracy: 0.8501
Epoch 15/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3517 - accuracy: 0.8572 - val_loss: 0.3689 - val_accuracy: 0.8497
Epoch 16/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3514 - accuracy: 0.8565 - val_loss: 0.3687 - val_accuracy: 0.8493
Epoch 17/150
536/536 [==============================] - 1s 992us/step - loss: 0.3512 - accuracy: 0.8571 - val_loss: 0.3686 - val_accuracy: 0.8489
Epoch 18/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3510 - accuracy: 0.8578 - val_loss: 0.3684 - val_accuracy: 0.8489
Epoch 19/150
536/536 [==============================] - 1s 990us/step - loss: 0.3508 - accuracy: 0.8569 - val_loss: 0.3682 - val_accuracy: 0.8482
Epoch 20/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3505 - accuracy: 0.8574 - val_loss: 0.3681 - val_accuracy: 0.8489
Epoch 21/150
536/536 [==============================] - 1s 956us/step - loss: 0.3503 - accuracy: 0.8580 - val_loss: 0.3680 - val_accuracy: 0.8489
Epoch 22/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3501 - accuracy: 0.8578 - val_loss: 0.3678 - val_accuracy: 0.8493
Epoch 23/150
536/536 [==============================] - 1s 946us/step - loss: 0.3499 - accuracy: 0.8584 - val_loss: 0.3677 - val_accuracy: 0.8485
Epoch 24/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3497 - accuracy: 0.8578 - val_loss: 0.3675 - val_accuracy: 0.8489
Epoch 25/150
536/536 [==============================] - 1s 991us/step - loss: 0.3496 - accuracy: 0.8580 - val_loss: 0.3674 - val_accuracy: 0.8489
Epoch 26/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3494 - accuracy: 0.8587 - val_loss: 0.3673 - val_accuracy: 0.8489
Epoch 27/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3492 - accuracy: 0.8587 - val_loss: 0.3671 - val_accuracy: 0.8485
Epoch 28/150
536/536 [==============================] - 1s 977us/step - loss: 0.3490 - accuracy: 0.8586 - val_loss: 0.3670 - val_accuracy: 0.8493
Epoch 29/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3488 - accuracy: 0.8587 - val_loss: 0.3669 - val_accuracy: 0.8493
Epoch 30/150
536/536 [==============================] - 1s 966us/step - loss: 0.3486 - accuracy: 0.8589 - val_loss: 0.3668 - val_accuracy: 0.8493
Epoch 31/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3484 - accuracy: 0.8593 - val_loss: 0.3667 - val_accuracy: 0.8493
Epoch 32/150
536/536 [==============================] - 1s 972us/step - loss: 0.3483 - accuracy: 0.8589 - val_loss: 0.3666 - val_accuracy: 0.8497
Epoch 33/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3481 - accuracy: 0.8591 - val_loss: 0.3665 - val_accuracy: 0.8497
Epoch 34/150
536/536 [==============================] - 1s 987us/step - loss: 0.3479 - accuracy: 0.8586 - val_loss: 0.3664 - val_accuracy: 0.8489
Epoch 35/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3478 - accuracy: 0.8589 - val_loss: 0.3663 - val_accuracy: 0.8493
Epoch 36/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3476 - accuracy: 0.8586 - val_loss: 0.3662 - val_accuracy: 0.8489
Epoch 37/150
536/536 [==============================] - 1s 959us/step - loss: 0.3474 - accuracy: 0.8593 - val_loss: 0.3661 - val_accuracy: 0.8489
Epoch 38/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3473 - accuracy: 0.8602 - val_loss: 0.3659 - val_accuracy: 0.8489
Epoch 39/150
536/536 [==============================] - 1s 979us/step - loss: 0.3471 - accuracy: 0.8593 - val_loss: 0.3658 - val_accuracy: 0.8493
Epoch 40/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3470 - accuracy: 0.8593 - val_loss: 0.3657 - val_accuracy: 0.8493
Epoch 41/150
536/536 [==============================] - 1s 961us/step - loss: 0.3468 - accuracy: 0.8595 - val_loss: 0.3656 - val_accuracy: 0.8493
Epoch 42/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3467 - accuracy: 0.8597 - val_loss: 0.3655 - val_accuracy: 0.8493
Epoch 43/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3465 - accuracy: 0.8595 - val_loss: 0.3654 - val_accuracy: 0.8489
Epoch 44/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3464 - accuracy: 0.8593 - val_loss: 0.3654 - val_accuracy: 0.8489
Epoch 45/150
536/536 [==============================] - 1s 954us/step - loss: 0.3462 - accuracy: 0.8599 - val_loss: 0.3653 - val_accuracy: 0.8489
Epoch 46/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3461 - accuracy: 0.8593 - val_loss: 0.3652 - val_accuracy: 0.8489
Epoch 47/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3460 - accuracy: 0.8599 - val_loss: 0.3651 - val_accuracy: 0.8489
Epoch 48/150
536/536 [==============================] - 1s 960us/step - loss: 0.3458 - accuracy: 0.8597 - val_loss: 0.3650 - val_accuracy: 0.8489
Epoch 49/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3457 - accuracy: 0.8593 - val_loss: 0.3649 - val_accuracy: 0.8489
Epoch 50/150
536/536 [==============================] - 1s 970us/step - loss: 0.3455 - accuracy: 0.8595 - val_loss: 0.3649 - val_accuracy: 0.8493
Epoch 51/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3454 - accuracy: 0.8593 - val_loss: 0.3648 - val_accuracy: 0.8493
Epoch 52/150
536/536 [==============================] - 1s 995us/step - loss: 0.3453 - accuracy: 0.8595 - val_loss: 0.3647 - val_accuracy: 0.8497
Epoch 53/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3452 - accuracy: 0.8599 - val_loss: 0.3646 - val_accuracy: 0.8497
Epoch 54/150
536/536 [==============================] - 1s 958us/step - loss: 0.3450 - accuracy: 0.8595 - val_loss: 0.3645 - val_accuracy: 0.8497
Epoch 55/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3449 - accuracy: 0.8591 - val_loss: 0.3645 - val_accuracy: 0.8497
Epoch 56/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3448 - accuracy: 0.8595 - val_loss: 0.3644 - val_accuracy: 0.8497
Epoch 57/150
536/536 [==============================] - 1s 971us/step - loss: 0.3447 - accuracy: 0.8595 - val_loss: 0.3643 - val_accuracy: 0.8497
Epoch 58/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3446 - accuracy: 0.8599 - val_loss: 0.3642 - val_accuracy: 0.8493
Epoch 59/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3444 - accuracy: 0.8597 - val_loss: 0.3641 - val_accuracy: 0.8497
Epoch 60/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3443 - accuracy: 0.8597 - val_loss: 0.3640 - val_accuracy: 0.8504
Epoch 61/150
536/536 [==============================] - 0s 915us/step - loss: 0.3442 - accuracy: 0.8599 - val_loss: 0.3639 - val_accuracy: 0.8504
Epoch 62/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3441 - accuracy: 0.8597 - val_loss: 0.3639 - val_accuracy: 0.8504
Epoch 63/150
536/536 [==============================] - 1s 979us/step - loss: 0.3440 - accuracy: 0.8600 - val_loss: 0.3638 - val_accuracy: 0.8504
Epoch 64/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3439 - accuracy: 0.8599 - val_loss: 0.3637 - val_accuracy: 0.8501
Epoch 65/150
536/536 [==============================] - 0s 922us/step - loss: 0.3438 - accuracy: 0.8604 - val_loss: 0.3636 - val_accuracy: 0.8504
Epoch 66/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3437 - accuracy: 0.8606 - val_loss: 0.3636 - val_accuracy: 0.8508
Epoch 67/150
536/536 [==============================] - 1s 983us/step - loss: 0.3436 - accuracy: 0.8602 - val_loss: 0.3635 - val_accuracy: 0.8512
Epoch 68/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3435 - accuracy: 0.8604 - val_loss: 0.3634 - val_accuracy: 0.8512
Epoch 69/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3434 - accuracy: 0.8604 - val_loss: 0.3634 - val_accuracy: 0.8512
Epoch 70/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3433 - accuracy: 0.8604 - val_loss: 0.3633 - val_accuracy: 0.8516
Epoch 71/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3432 - accuracy: 0.8602 - val_loss: 0.3632 - val_accuracy: 0.8516
Epoch 72/150
536/536 [==============================] - 1s 953us/step - loss: 0.3431 - accuracy: 0.8606 - val_loss: 0.3632 - val_accuracy: 0.8512
Epoch 73/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3430 - accuracy: 0.8600 - val_loss: 0.3631 - val_accuracy: 0.8512
Epoch 74/150
536/536 [==============================] - 1s 963us/step - loss: 0.3429 - accuracy: 0.8604 - val_loss: 0.3630 - val_accuracy: 0.8512
Epoch 75/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3428 - accuracy: 0.8600 - val_loss: 0.3630 - val_accuracy: 0.8512
Epoch 76/150
536/536 [==============================] - 1s 952us/step - loss: 0.3428 - accuracy: 0.8606 - val_loss: 0.3629 - val_accuracy: 0.8512
Epoch 77/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3427 - accuracy: 0.8606 - val_loss: 0.3629 - val_accuracy: 0.8508
Epoch 78/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3426 - accuracy: 0.8606 - val_loss: 0.3628 - val_accuracy: 0.8508
Epoch 79/150
536/536 [==============================] - 1s 945us/step - loss: 0.3425 - accuracy: 0.8604 - val_loss: 0.3628 - val_accuracy: 0.8504
Epoch 80/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3424 - accuracy: 0.8602 - val_loss: 0.3627 - val_accuracy: 0.8504
Epoch 81/150
536/536 [==============================] - 1s 964us/step - loss: 0.3423 - accuracy: 0.8608 - val_loss: 0.3627 - val_accuracy: 0.8504
Epoch 82/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3423 - accuracy: 0.8600 - val_loss: 0.3626 - val_accuracy: 0.8504
Epoch 83/150
536/536 [==============================] - 1s 963us/step - loss: 0.3422 - accuracy: 0.8606 - val_loss: 0.3626 - val_accuracy: 0.8508
Epoch 84/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3421 - accuracy: 0.8602 - val_loss: 0.3625 - val_accuracy: 0.8512
Epoch 85/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3420 - accuracy: 0.8602 - val_loss: 0.3625 - val_accuracy: 0.8512
Epoch 86/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3419 - accuracy: 0.8606 - val_loss: 0.3625 - val_accuracy: 0.8512
Epoch 87/150
536/536 [==============================] - 0s 924us/step - loss: 0.3419 - accuracy: 0.8604 - val_loss: 0.3624 - val_accuracy: 0.8512
Epoch 88/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3418 - accuracy: 0.8610 - val_loss: 0.3624 - val_accuracy: 0.8512
Epoch 89/150
536/536 [==============================] - 1s 979us/step - loss: 0.3417 - accuracy: 0.8608 - val_loss: 0.3623 - val_accuracy: 0.8512
Epoch 90/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3417 - accuracy: 0.8606 - val_loss: 0.3623 - val_accuracy: 0.8516
Epoch 91/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3416 - accuracy: 0.8606 - val_loss: 0.3622 - val_accuracy: 0.8516
Epoch 92/150
536/536 [==============================] - 1s 980us/step - loss: 0.3415 - accuracy: 0.8610 - val_loss: 0.3622 - val_accuracy: 0.8516
Epoch 93/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3415 - accuracy: 0.8608 - val_loss: 0.3622 - val_accuracy: 0.8516
Epoch 94/150
536/536 [==============================] - 1s 954us/step - loss: 0.3414 - accuracy: 0.8600 - val_loss: 0.3622 - val_accuracy: 0.8512
Epoch 95/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3413 - accuracy: 0.8608 - val_loss: 0.3621 - val_accuracy: 0.8512
Epoch 96/150
536/536 [==============================] - 1s 962us/step - loss: 0.3413 - accuracy: 0.8602 - val_loss: 0.3621 - val_accuracy: 0.8512
Epoch 97/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3412 - accuracy: 0.8610 - val_loss: 0.3621 - val_accuracy: 0.8512
Epoch 98/150
536/536 [==============================] - 1s 957us/step - loss: 0.3412 - accuracy: 0.8606 - val_loss: 0.3620 - val_accuracy: 0.8512
Epoch 99/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3411 - accuracy: 0.8610 - val_loss: 0.3620 - val_accuracy: 0.8512
Epoch 100/150
536/536 [==============================] - 1s 940us/step - loss: 0.3410 - accuracy: 0.8612 - val_loss: 0.3620 - val_accuracy: 0.8512
Epoch 101/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3410 - accuracy: 0.8606 - val_loss: 0.3619 - val_accuracy: 0.8519
Epoch 102/150
536/536 [==============================] - 1s 941us/step - loss: 0.3409 - accuracy: 0.8608 - val_loss: 0.3619 - val_accuracy: 0.8519
Epoch 103/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3409 - accuracy: 0.8608 - val_loss: 0.3619 - val_accuracy: 0.8519
Epoch 104/150
536/536 [==============================] - 1s 987us/step - loss: 0.3408 - accuracy: 0.8610 - val_loss: 0.3619 - val_accuracy: 0.8516
Epoch 105/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3407 - accuracy: 0.8614 - val_loss: 0.3618 - val_accuracy: 0.8516
Epoch 106/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3407 - accuracy: 0.8612 - val_loss: 0.3618 - val_accuracy: 0.8516
Epoch 107/150
536/536 [==============================] - 1s 968us/step - loss: 0.3406 - accuracy: 0.8610 - val_loss: 0.3618 - val_accuracy: 0.8516
Epoch 108/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3406 - accuracy: 0.8608 - val_loss: 0.3618 - val_accuracy: 0.8512
Epoch 109/150
536/536 [==============================] - 1s 948us/step - loss: 0.3405 - accuracy: 0.8608 - val_loss: 0.3618 - val_accuracy: 0.8512
Epoch 110/150
536/536 [==============================] - 1s 992us/step - loss: 0.3405 - accuracy: 0.8610 - val_loss: 0.3618 - val_accuracy: 0.8512
Epoch 111/150
536/536 [==============================] - 1s 961us/step - loss: 0.3404 - accuracy: 0.8606 - val_loss: 0.3618 - val_accuracy: 0.8512
Epoch 112/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3404 - accuracy: 0.8617 - val_loss: 0.3617 - val_accuracy: 0.8512
Epoch 113/150
536/536 [==============================] - 1s 945us/step - loss: 0.3403 - accuracy: 0.8615 - val_loss: 0.3617 - val_accuracy: 0.8516
Epoch 114/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3403 - accuracy: 0.8614 - val_loss: 0.3617 - val_accuracy: 0.8519
Epoch 115/150
536/536 [==============================] - 1s 942us/step - loss: 0.3402 - accuracy: 0.8617 - val_loss: 0.3617 - val_accuracy: 0.8519
Epoch 116/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3402 - accuracy: 0.8617 - val_loss: 0.3616 - val_accuracy: 0.8519
Epoch 117/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3401 - accuracy: 0.8615 - val_loss: 0.3616 - val_accuracy: 0.8519
Epoch 118/150
536/536 [==============================] - 1s 987us/step - loss: 0.3401 - accuracy: 0.8615 - val_loss: 0.3616 - val_accuracy: 0.8519
Epoch 119/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3400 - accuracy: 0.8625 - val_loss: 0.3616 - val_accuracy: 0.8523
Epoch 120/150
536/536 [==============================] - 1s 967us/step - loss: 0.3400 - accuracy: 0.8617 - val_loss: 0.3615 - val_accuracy: 0.8519
Epoch 121/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3399 - accuracy: 0.8619 - val_loss: 0.3615 - val_accuracy: 0.8523
Epoch 122/150
536/536 [==============================] - 1s 939us/step - loss: 0.3399 - accuracy: 0.8619 - val_loss: 0.3615 - val_accuracy: 0.8523
Epoch 123/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3398 - accuracy: 0.8615 - val_loss: 0.3615 - val_accuracy: 0.8519
Epoch 124/150
536/536 [==============================] - 1s 941us/step - loss: 0.3398 - accuracy: 0.8614 - val_loss: 0.3615 - val_accuracy: 0.8523
Epoch 125/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3397 - accuracy: 0.8628 - val_loss: 0.3615 - val_accuracy: 0.8523
Epoch 126/150
536/536 [==============================] - 1s 974us/step - loss: 0.3397 - accuracy: 0.8621 - val_loss: 0.3615 - val_accuracy: 0.8519
Epoch 127/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3396 - accuracy: 0.8621 - val_loss: 0.3615 - val_accuracy: 0.8516
Epoch 128/150
536/536 [==============================] - 1s 1000us/step - loss: 0.3396 - accuracy: 0.8617 - val_loss: 0.3614 - val_accuracy: 0.8519
Epoch 129/150
536/536 [==============================] - 1s 945us/step - loss: 0.3395 - accuracy: 0.8621 - val_loss: 0.3614 - val_accuracy: 0.8519
Epoch 130/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3395 - accuracy: 0.8619 - val_loss: 0.3614 - val_accuracy: 0.8516
Epoch 131/150
536/536 [==============================] - 1s 969us/step - loss: 0.3395 - accuracy: 0.8612 - val_loss: 0.3614 - val_accuracy: 0.8519
Epoch 132/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3394 - accuracy: 0.8623 - val_loss: 0.3614 - val_accuracy: 0.8519
Epoch 133/150
536/536 [==============================] - 1s 938us/step - loss: 0.3394 - accuracy: 0.8619 - val_loss: 0.3614 - val_accuracy: 0.8516
Epoch 134/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3393 - accuracy: 0.8621 - val_loss: 0.3614 - val_accuracy: 0.8523
Epoch 135/150
536/536 [==============================] - 1s 952us/step - loss: 0.3393 - accuracy: 0.8630 - val_loss: 0.3613 - val_accuracy: 0.8527
Epoch 136/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3392 - accuracy: 0.8619 - val_loss: 0.3613 - val_accuracy: 0.8523
Epoch 137/150
536/536 [==============================] - 1s 956us/step - loss: 0.3392 - accuracy: 0.8623 - val_loss: 0.3613 - val_accuracy: 0.8523
Epoch 138/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3392 - accuracy: 0.8628 - val_loss: 0.3613 - val_accuracy: 0.8527
Epoch 139/150
536/536 [==============================] - 1s 959us/step - loss: 0.3391 - accuracy: 0.8615 - val_loss: 0.3614 - val_accuracy: 0.8516
Epoch 140/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3391 - accuracy: 0.8623 - val_loss: 0.3614 - val_accuracy: 0.8519
Epoch 141/150
536/536 [==============================] - 1s 993us/step - loss: 0.3390 - accuracy: 0.8625 - val_loss: 0.3613 - val_accuracy: 0.8512
Epoch 142/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3390 - accuracy: 0.8619 - val_loss: 0.3613 - val_accuracy: 0.8523
Epoch 143/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3390 - accuracy: 0.8625 - val_loss: 0.3613 - val_accuracy: 0.8519
Epoch 144/150
536/536 [==============================] - 1s 956us/step - loss: 0.3389 - accuracy: 0.8630 - val_loss: 0.3613 - val_accuracy: 0.8519
Epoch 145/150
536/536 [==============================] - 1s 994us/step - loss: 0.3389 - accuracy: 0.8621 - val_loss: 0.3613 - val_accuracy: 0.8519
Epoch 146/150
536/536 [==============================] - 1s 964us/step - loss: 0.3389 - accuracy: 0.8621 - val_loss: 0.3613 - val_accuracy: 0.8519
Epoch 147/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3388 - accuracy: 0.8617 - val_loss: 0.3613 - val_accuracy: 0.8519
Epoch 148/150
536/536 [==============================] - 1s 954us/step - loss: 0.3388 - accuracy: 0.8625 - val_loss: 0.3613 - val_accuracy: 0.8516
Epoch 149/150
536/536 [==============================] - 1s 1ms/step - loss: 0.3388 - accuracy: 0.8627 - val_loss: 0.3612 - val_accuracy: 0.8516
Epoch 150/150
536/536 [==============================] - 1s 937us/step - loss: 0.3387 - accuracy: 0.8627 - val_loss: 0.3612 - val_accuracy: 0.8516
In [139]:
# Plot training vs. validation accuracy across epochs
hist = model2_history.history
plt.plot(hist['accuracy'])
plt.plot(hist['val_accuracy'])
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.title('model accuracy')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
In [140]:
# Plot training vs. validation loss across epochs
hist = model2_history.history
plt.plot(hist['loss'])
plt.plot(hist['val_loss'])
plt.xlabel('epoch')
plt.ylabel('loss')
plt.title('model loss')
plt.legend(['train', 'validation'], loc='upper right')
plt.show()
In [141]:
# Predict churn probabilities on the test set, then threshold at 0.5
# to turn them into hard class labels (True = predicted churn)
probabilities = ann_model2.predict(X_test)
y_pred = probabilities > 0.5
In [142]:
# Evaluate on the test set: confusion matrix and overall accuracy
from sklearn.metrics import confusion_matrix, accuracy_score, classification_report
conf_mat = confusion_matrix(y_true=y_test, y_pred=y_pred)
print(conf_mat)
print(accuracy_score(y_test, y_pred))
[[1529   66]
 [ 210  195]]
0.862
In [143]:
# Per-class precision / recall / f1 breakdown for the test predictions
report = metrics.classification_report(y_test, y_pred)
print(report)
              precision    recall  f1-score   support

           0       0.88      0.96      0.92      1595
           1       0.75      0.48      0.59       405

    accuracy                           0.86      2000
   macro avg       0.81      0.72      0.75      2000
weighted avg       0.85      0.86      0.85      2000

In [ ]:
# With the change in the hyperparameters we were able to improve the model accuracy from 0.852 to 0.862

# German and French customers are more likely to exit
# Women have a higher exit rate
# Customers who hold a credit card are more likely to leave
# Inactive customers are leaving
# One-year-old customers are more likely to leave
# The average age of exited customers is 45 years
# 650 is the average credit score of exited customers
# 86000 is the average balance & 100000 is the average salary of exited customers
# A lot of customers have zero account balance